1. Reading the dataset

# Load the raw dataset from CSV and preview the first rows.
library(readr)
dataset <- read_csv("new_dataset.csv")
head(dataset)

2. Splitting the dataset

We will split the dataset into a training set (70%) and a testing set (30%).

set.seed(123) # for reproducibility
# 70/30 train/test split by row index. floor() makes the truncation that
# sample() applies to a non-integer size explicit; seq_len() is the safe
# idiom for index sequences (1:nrow() misbehaves for zero rows).
n_rows <- nrow(dataset)
sample_index <- sample(seq_len(n_rows), floor(0.7 * n_rows))
train_data <- dataset[sample_index, ]
test_data <- dataset[-sample_index, ]

3. Creating a regression model

Using the ranger package, we'll predict the `tmg` feature using permutation importance.

# Fit a ranger random forest regressing tmg on all other columns,
# computing permutation-based variable importance during training.
library(ranger)
model_per <- ranger(tmg ~ ., data = train_data, importance = 'permutation')
model_per

4. Calculate importance with permutation

library(dplyr)
library(ggplot2)

# Top-20 permutation importances as a horizontal bar chart.
# Build the data frame explicitly from the named importance vector instead
# of the deprecated add_rownames() plus a backtick-quoted derived column.
imp_per <- importance(model_per)
plot_perm <- data.frame(
  predictor = names(imp_per),
  importance = unname(imp_per)
) |>
  arrange(desc(importance)) |>
  head(20) |>
  # reverse the factor levels so the largest bar is drawn at the top
  mutate(predictor = factor(predictor, levels = rev(unique(predictor)))) |>
  ggplot() +
  geom_col(aes(y = predictor, x = importance), fill = 'darkblue', color = 'gray') +
  ggtitle("Top 20 predictor importance using permutation") +
  theme_minimal()
plot_perm

5. Calculate importance with impurity

# Fit a second forest that scores importance by impurity (node variance
# decrease) so it can be compared against the permutation ranking.
library(ranger)
model_imp <- ranger(tmg ~ ., data = train_data, importance = 'impurity')
model_imp
# Top-20 impurity importances; same explicit construction as the
# permutation plot, avoiding the deprecated add_rownames() idiom.
imp_imp <- importance(model_imp)
plot_imp <- data.frame(
  predictor = names(imp_imp),
  importance = unname(imp_imp)
) |>
  arrange(desc(importance)) |>
  head(20) |>
  mutate(predictor = factor(predictor, levels = rev(unique(predictor)))) |>
  ggplot() +
  geom_col(aes(y = predictor, x = importance), fill = 'darkblue', color = 'gray') +
  ggtitle("Top 20 predictor importance using impurity") +
  theme_minimal()

Permutation vs. Impurity

# Show the impurity and permutation rankings side by side, then each alone.
library(gridExtra)
gridExtra::grid.arrange(plot_imp,plot_perm, ncol=2)
plot_imp
plot_perm

5. Evaluate results on test dataset

# Hold-out RMSE on the 30% test split.
predictions <- predict(model_per, data = test_data)$predictions
# Compute the RMSE (Root Mean Square Error)
RMSE <- sqrt(mean((predictions - test_data$tmg)^2))
RMSE
# NOTE(review): the full-dataset RMSE below includes the training rows, so
# it is optimistically biased relative to the test-set figure above.
predictions <- predict(model_per, data = dataset)$predictions
# Compute the RMSE (Root Mean Square Error)
RMSE <- sqrt(mean((predictions - dataset$tmg)^2))
RMSE

6. Plot: Predicted vs Reference values

library(ggplot2)

# Scatter of model predictions against reference tmg values with the y = x
# identity line (points on the red line are perfect predictions).
# Replaces three copy-pasted blocks that differed only in the data argument.
plot_pred_vs_ref <- function(data, title = "Predicted vs Reference values") {
  predictions <- predict(model_per, data = data)$predictions
  results <- data.frame(Reference = data$tmg, Predicted = predictions)
  ggplot(results, aes(x = Reference, y = Predicted)) +
    geom_point(color = 'blue') +
    geom_abline(intercept = 0, slope = 1, color = 'red') +
    ggtitle(title) +
    xlab("Reference Values") +
    ylab("Predicted Values") +
    theme_bw()
}

# Test split, training split, and full dataset (top-level calls autoprint).
plot_pred_vs_ref(test_data)
plot_pred_vs_ref(train_data)
plot_pred_vs_ref(dataset)
# Install and load necessary packages
# install.packages("ranger")
# install.packages("caret")
library(ranger)
library(caret)
library(readr)
library(caret)
library(ranger)
library(caret)
library(dplyr)


library(readr)
library(caret)
Loading required package: ggplot2
Loading required package: lattice
library(ranger)
library(caret)
library(dplyr)

Attaching package: ‘dplyr’

The following objects are masked from ‘package:stats’:

    filter, lag

The following objects are masked from ‘package:base’:

    intersect, setdiff, setequal, union
# Re-read the raw CSV for the cross-validated caret workflow below.
dataset <- read_csv("new_dataset.csv")
Rows: 568 Columns: 648── Column specification ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
Delimiter: ","
chr   (1): name
dbl (647): psd_1, psd_2, psd_3, psd_4, psd_5, psd_6, psd_7, psd_8, psd_9, psd_10, psd_11, psd_12, psd_13, psd_14, psd_15, psd_16, psd_17, psd_18, psd_19, psd_20, psd_21, psd_22, psd_23,...
ℹ Use `spec()` to retrieve the full column specification for this data.
ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
# Drop near-zero-variance predictors. Guard the empty case: when
# nearZeroVar() returns integer(0), dataset[, -nzv] would drop EVERY column.
nzv <- nearZeroVar(dataset, saveMetrics = FALSE)
if (length(nzv) > 0) {
  dataset <- dataset[, -nzv]
}
dataset$name <- NULL  # remove the non-predictive sample identifier
set.seed(123) # for reproducibility
sample_index <- sample(seq_len(nrow(dataset)), floor(0.7 * nrow(dataset)))
train_data <- dataset[sample_index, ]
test_data <- dataset[-sample_index, ]
# mod_2=rgcv(tmg ~ .,data = train_data, importance = 'permutation', cv.fold=10)
# Tune a ranger forest with caret: 10-fold cross-validation over caret's
# default mtry/splitrule grid, keeping permutation importance on the fit.
model_per <- train(
  tmg ~ .,
  data = train_data,
  method = "ranger",
  trControl = trainControl(method = "cv", number = 10),
  importance = "permutation"
)
# perm_imp <- permutationFeature(model_per, metric = "RMSE", trControl = ctrl)

# model_per <- ranger(tmg ~ .,data = train_data, importance = 'permutation')

# Calibration plot: predicted vs reference tmg with the identity line and
# the RMSE (rounded) in the title. The debug print() calls that dumped the
# entire data frame and prediction vectors to the console are removed.
# `model` defaults to the global model_per for backward compatibility.
do_plot <- function(data, type, model = model_per) {
  predictions <- as.vector(predict(model, data))
  rmse <- sqrt(mean((predictions - data$tmg)^2))
  results <- data.frame(Reference = data$tmg, Predicted = predictions)
  ggplot(results, aes(x = Reference, y = Predicted)) +
    geom_point(color = 'blue') +
    geom_abline(intercept = 0, slope = 1, color = 'red') +
    ggtitle(paste(type, "Random Forest; RMSE:", signif(rmse, 4))) +
    xlab("Reference Values") +
    ylab("Predicted Values") +
    theme_bw()
}
do_plot(test_data, "Test")
  [1] 0.2689269 0.4053836 0.3649996 0.3667678 0.3655683 0.3632867 0.3644209 0.3538282 0.3481348 0.3520358 0.2509988 0.2582524 0.2423742 0.2702699 0.2320738 0.2624782 0.2573334 0.2534757
 [19] 0.4355293 0.4039390 0.4052275 0.3981909 0.4023658 0.4070284 0.4013852 0.4037824 0.4374316 0.4402415 0.4429377 0.4425301 0.4406133 0.4429935 0.4418933 0.4424070 0.3936540 0.4019863
 [37] 0.4031244 0.3733902 0.3283978 0.3555004 0.3969559 0.3450029 0.3259292 0.3420153 0.3383234 0.3451772 0.3478461 0.3526765 0.3430144 0.3446427 0.3341462 0.3891644 0.4637248 0.3492502
 [55] 0.3741866 0.3745212 0.3693188 0.3760095 0.3670509 0.3733676 0.3731008 0.3797560 0.4218498 0.4209190 0.4210648 0.4214584 0.3809959 0.3766756 0.3782384 0.4339579 0.2823448 0.2630980
 [73] 0.2755730 0.2577974 0.4198652 0.4244795 0.4206386 0.4262408 0.4218937 0.4178009 0.3916835 0.3930787 0.3927441 0.3925250 0.3875441 0.3979316 0.3691211 0.3988731 0.3948259 0.3590153
 [91] 0.3595331 0.3533867 0.4846511 0.2310049 0.2133585 0.2255054 0.4201381 0.4239103 0.4178412 0.4110824 0.3562412 0.3442412 0.3515870 0.3597254 0.3563275 0.3526094 0.3524258 0.3509225
[109] 0.4186149 0.4196975 0.4220581 0.4237472 0.4237661 0.4246864 0.4252443 0.4259567 0.2363466 0.2322584 0.2338668 0.2313073 0.2574623 0.2548630 0.2602922 0.2597568 0.2266073 0.2271369
[127] 0.2221563 0.2232829 0.2573293 0.2662636 0.2580088 0.2470324 0.4281665 0.4082926 0.4060756 0.4070808 0.4083692 0.3539352 0.3563792 0.3563985 0.3574177 0.3612098 0.3551919 0.4071818
[145] 0.4031748 0.4107778 0.4057403 0.2605003 0.2599739 0.2659934 0.2595775 0.2646526 0.2620849 0.2688872 0.2634987 0.2582253 0.4829680 0.4506270 0.4503725 0.2692258 0.2754180 0.4301141
[163] 0.4779121 0.4761561 0.4372146 0.3746027 0.4324379 0.3444640 0.2238289 0.4754487 0.4089247
  [1] 0.2100 0.4400 0.3700 0.3800 0.3800 0.3800 0.3700 0.3600 0.3500 0.3400 0.2800 0.2700 0.2300 0.2400 0.2200 0.2500 0.2100 0.2400 0.4800 0.4100 0.4000 0.4000 0.4100 0.4000 0.4100 0.4100
 [27] 0.4500 0.4400 0.4400 0.4400 0.4500 0.4400 0.4500 0.4500 0.4100 0.4200 0.4000 0.4300 0.3700 0.4200 0.3000 0.3400 0.3400 0.3600 0.3300 0.3600 0.3400 0.3500 0.3300 0.3200 0.3200 0.3000
 [53] 0.4600 0.3800 0.3800 0.3700 0.3900 0.3900 0.3700 0.3500 0.3800 0.3800 0.4300 0.4300 0.4300 0.4300 0.4000 0.3800 0.3900 0.4900 0.3200 0.2500 0.2900 0.2400 0.4300 0.4300 0.4300 0.4300
 [79] 0.4300 0.4300 0.3800 0.3800 0.3900 0.3900 0.3800 0.4300 0.3200 0.3800 0.2900 0.4300 0.3600 0.3900 0.5000 0.1900 0.2800 0.1300 0.4100 0.4300 0.4100 0.4200 0.3800 0.3600 0.3800 0.3800
[105] 0.3500 0.3500 0.3700 0.3600 0.4300 0.4000 0.4100 0.4200 0.4000 0.4300 0.4000 0.4100 0.2200 0.2700 0.2000 0.2400 0.2700 0.2400 0.3100 0.1600 0.2800 0.2600 0.2000 0.2300 0.2600 0.3000
[131] 0.2800 0.2200 0.4400 0.4000 0.4100 0.4000 0.4000 0.3500 0.3500 0.3600 0.3700 0.3600 0.3800 0.4000 0.4100 0.3900 0.4000 0.3000 0.1500 0.3100 0.2600 0.2700 0.2100 0.3100 0.2200 0.2900
[157] 0.5266 0.4806 0.4900 0.1926 0.1979 0.4837 0.4868 0.4965 0.4148 0.4023 0.3370 0.3539 0.2480 0.5105 0.4037

# Calibration plot on the training split (in-sample, optimistic).
do_plot(train_data, "Train")
  [1] 0.2871247 0.4081337 0.3826972 0.2377298 0.4016557 0.4357830 0.3524646 0.2504880 0.4615884 0.3672767 0.3558274 0.4458457 0.4255670 0.3530934 0.3368018 0.2797827 0.2251930 0.4264190
 [19] 0.3419663 0.3470506 0.3523341 0.3646444 0.3503391 0.3699822 0.2610450 0.3616606 0.3503485 0.3865345 0.3659097 0.4078245 0.3474856 0.3358104 0.3968721 0.3804572 0.4483053 0.2753050
 [37] 0.2112019 0.4413384 0.4517705 0.3783560 0.2668398 0.4078221 0.4685421 0.2016838 0.3293283 0.4142457 0.4318029 0.3544025 0.4079328 0.3690654 0.4122699 0.2465563 0.3673308 0.5016549
 [55] 0.4451141 0.3887534 0.2176683 0.3487803 0.3672408 0.1910486 0.2789166 0.3493443 0.3958754 0.3514727 0.3980844 0.3641106 0.2369651 0.3649318 0.3580202 0.4193415 0.4015868 0.3560847
 [73] 0.3659287 0.4234297 0.4394534 0.3482001 0.3766881 0.2478012 0.4446557 0.3562384 0.3419427 0.2818055 0.4456081 0.1912018 0.3750578 0.4326722 0.4045210 0.3903895 0.3610422 0.3893311
 [91] 0.2709527 0.3585730 0.3672459 0.2696114 0.3181830 0.3504564 0.2313074 0.3611232 0.3518126 0.5090696 0.4078832 0.4057196 0.4297858 0.3943910 0.3414150 0.3410438 0.4413633 0.4216207
[109] 0.3525913 0.4030051 0.3516369 0.4878344 0.3725990 0.2618794 0.3958660 0.4034735 0.4057248 0.4309828 0.4846070 0.2118861 0.2577723 0.2793681 0.4292147 0.4168836 0.2714244 0.3573873
[127] 0.3971263 0.3870580 0.2655985 0.4377528 0.4390349 0.3802004 0.3566058 0.2488558 0.4569518 0.4303637 0.3816910 0.4010004 0.4399207 0.2634908 0.4194226 0.2432385 0.3426440 0.3603614
[145] 0.4731791 0.4267103 0.3615027 0.1961402 0.3581327 0.3470920 0.4237846 0.2102208 0.4937867 0.4464279 0.3725019 0.4142353 0.4326623 0.3525644 0.3709085 0.3448214 0.4992390 0.4036429
[163] 0.2631131 0.4272854 0.4281141 0.4317613 0.3573665 0.4226035 0.3623376 0.3640332 0.3612580 0.4315841 0.3495268 0.2184150 0.3666888 0.2225038 0.4354059 0.3977908 0.3986227 0.3811427
[181] 0.4076734 0.3451853 0.3324869 0.2236381 0.4838027 0.4524149 0.2353579 0.3888018 0.3707591 0.4267259 0.4099445 0.4291577 0.3769071 0.4272276 0.4195955 0.3676444 0.3921727 0.3477877
[199] 0.2594514 0.4782895 0.3697770 0.4149692 0.2393905 0.4825637 0.4040740 0.3678768 0.5012013 0.3915276 0.3248728 0.4251447 0.3999874 0.3786974 0.4101786 0.4101221 0.4259970 0.4231428
[217] 0.4142954 0.3460745 0.3738009 0.4600988 0.4094892 0.4082986 0.3532721 0.3741668 0.4020766 0.3531541 0.4144587 0.3303721 0.3375340 0.2426554 0.3279041 0.3996086 0.3986557 0.3523415
[235] 0.3767642 0.3988434 0.3450894 0.3583992 0.2748303 0.3172573 0.2219365 0.2107871 0.3433137 0.3766790 0.2275663 0.3661834 0.4652893 0.2509934 0.3524690 0.4014159 0.1961742 0.1852804
[253] 0.4264948 0.4411043 0.4274671 0.2582569 0.4689274 0.3488020 0.3941227 0.3555659 0.3772977 0.4331665 0.3908616 0.1926758 0.2416522 0.2739350 0.3967536 0.4410547 0.2200760 0.4011318
[271] 0.3460387 0.2298082 0.3959045 0.3701122 0.4205830 0.2841555 0.4016048 0.4399034 0.2515936 0.3997106 0.3959717 0.4463507 0.2559964 0.1962145 0.2473089 0.2087417 0.2226538 0.4111853
[289] 0.2430133 0.2224785 0.2811484 0.3713830 0.2411744 0.3450342 0.2366318 0.4662495 0.3516145 0.4419756 0.3692998 0.3845447 0.3712363 0.2479043 0.4188207 0.4205333 0.4425131 0.3598113
[307] 0.4148218 0.3678192 0.2434608 0.4134862 0.3470594 0.3547905 0.3799720 0.2169570 0.3540527 0.3927499 0.4089690 0.2038637 0.4203946 0.2572152 0.3523329 0.4128596 0.4260638 0.3390340
[325] 0.4936290 0.3571270 0.3647404 0.2095205 0.3668304 0.3011025 0.4184188 0.3355538 0.4356104 0.4635185 0.2182548 0.4591063 0.4123895 0.3860391 0.2595910 0.3555031 0.4019427 0.3466801
[343] 0.4414926 0.5060413 0.3988787 0.4411182 0.4367580 0.4036836 0.2205215 0.3497740 0.3413632 0.3604704 0.4376736 0.4650387 0.2086093 0.2118318 0.3822077 0.2392990 0.3423928 0.3424705
[361] 0.3877463 0.3696465 0.4010527 0.4765927 0.3379579 0.2859952 0.3931068 0.4765927 0.3919439 0.1923227 0.4263084 0.3748105 0.3681300 0.2516127 0.3678701 0.4091938 0.2414272 0.4389744
[379] 0.2600344 0.3529609 0.4161375 0.2195732 0.3509037 0.3603447 0.4154150 0.3838294 0.4111321 0.3609444 0.2735856 0.3790042 0.4046832 0.1955575 0.2544728 0.2671579 0.4679386 0.3983077
[397] 0.4337486
  [1] 0.3100 0.4100 0.3900 0.2200 0.4500 0.4400 0.3500 0.2400 0.4600 0.3700 0.3600 0.4500 0.4300 0.3500 0.3300 0.2900 0.2200 0.4300 0.3300 0.3500 0.3623 0.3700 0.3500 0.3800 0.2000 0.3500
 [27] 0.3500 0.3900 0.3800 0.4100 0.3500 0.3300 0.4100 0.4200 0.4485 0.3100 0.2000 0.4400 0.4700 0.3800 0.2700 0.4100 0.4900 0.1700 0.3200 0.4100 0.4305 0.3600 0.4100 0.3700 0.4200 0.2600
 [53] 0.3700 0.5000 0.4500 0.3700 0.1900 0.3500 0.4200 0.1600 0.3000 0.3200 0.3900 0.3600 0.4000 0.3700 0.2400 0.3700 0.3600 0.4200 0.4000 0.3700 0.3700 0.4300 0.4400 0.3500 0.3500 0.2200
 [79] 0.4500 0.3100 0.3300 0.3000 0.4500 0.1700 0.3800 0.4400 0.4000 0.3900 0.3400 0.3900 0.2700 0.3600 0.3700 0.2800 0.3200 0.3500 0.2100 0.3700 0.3500 0.5200 0.4100 0.4100 0.4300 0.4247
[105] 0.3300 0.3400 0.4400 0.4200 0.3600 0.4000 0.3500 0.5045 0.3800 0.2800 0.4000 0.4000 0.4200 0.4438 0.4900 0.2100 0.2600 0.2800 0.4300 0.4100 0.2800 0.3600 0.3900 0.4000 0.2800 0.4400
[131] 0.4400 0.3800 0.3600 0.2500 0.4700 0.4300 0.4000 0.4000 0.4400 0.2800 0.4200 0.2400 0.3400 0.3600 0.4794 0.4300 0.3700 0.1300 0.3600 0.3400 0.4700 0.1900 0.5000 0.4500 0.3800 0.4200
[157] 0.4400 0.3500 0.3800 0.3400 0.5400 0.4000 0.2900 0.4300 0.4300 0.4400 0.3600 0.4200 0.3600 0.3700 0.3600 0.4400 0.3500 0.2100 0.3700 0.2100 0.4400 0.4000 0.4000 0.3900 0.4100 0.3400
[183] 0.3200 0.2300 0.4900 0.4546 0.2200 0.3700 0.3700 0.4300 0.4200 0.4300 0.3768 0.4300 0.4200 0.3800 0.3900 0.3500 0.2700 0.4900 0.3700 0.4200 0.2500 0.4800 0.4000 0.3700 0.5100 0.3900
[209] 0.2800 0.4300 0.4200 0.3800 0.4100 0.4000 0.4300 0.4400 0.4100 0.3400 0.3800 0.4600 0.4100 0.4249 0.3500 0.3700 0.4000 0.3600 0.4200 0.3200 0.3300 0.2400 0.3200 0.4000 0.4100 0.3500
[235] 0.3800 0.4000 0.3400 0.3600 0.2900 0.2993 0.1900 0.1900 0.3400 0.3500 0.2300 0.3700 0.4700 0.2600 0.3600 0.4000 0.1700 0.1600 0.4300 0.4400 0.4300 0.2600 0.5036 0.3500 0.4100 0.3500
[261] 0.3800 0.4300 0.3900 0.1700 0.2400 0.2800 0.3900 0.4400 0.2200 0.4000 0.3500 0.2200 0.3900 0.3800 0.4200 0.1800 0.4309 0.4400 0.2500 0.4000 0.4100 0.4500 0.2600 0.1700 0.2400 0.1800
[287] 0.2200 0.4200 0.2500 0.2000 0.2900 0.3800 0.2500 0.3400 0.2600 0.4700 0.3500 0.4400 0.3800 0.3800 0.3800 0.2400 0.4200 0.4200 0.4500 0.3600 0.4200 0.3700 0.2400 0.4200 0.3400 0.3600
[313] 0.3600 0.2000 0.3500 0.3900 0.4100 0.1900 0.4200 0.2600 0.3500 0.4100 0.4300 0.3400 0.5000 0.3500 0.3600 0.2100 0.3700 0.3000 0.4200 0.3200 0.4548 0.4600 0.2200 0.4600 0.4200 0.3900
[339] 0.2355 0.3600 0.4000 0.3800 0.4400 0.5100 0.4000 0.4400 0.4400 0.4000 0.2300 0.3500 0.3300 0.3600 0.4400 0.4700 0.1900 0.1900 0.4000 0.2300 0.3300 0.3400 0.3900 0.3700 0.4000 0.4800
[365] 0.3300 0.2900 0.3900 0.4800 0.3900 0.1717 0.4300 0.3600 0.3700 0.2700 0.3700 0.4100 0.2500 0.4400 0.2600 0.3500 0.4200 0.2000 0.3400 0.3600 0.4200 0.3900 0.4200 0.3600 0.2758 0.3800
[391] 0.4163 0.1456 0.2700 0.2800 0.4800 0.4200 0.4400

# Calibration plot on the full dataset (train and test rows combined).
do_plot(dataset, "Total")
  [1] 0.2689269 0.4053836 0.3466801 0.3986557 0.3725990 0.3681300 0.3738009 0.3696465 0.3649996 0.3666888 0.3725019 0.3571270 0.3690654 0.3672767 0.3667678 0.3612580 0.3655683 0.3632867
 [19] 0.3644209 0.3426440 0.3538282 0.3470920 0.3503391 0.3488020 0.3477877 0.3368018 0.3460745 0.3481348 0.3379579 0.3450342 0.3547905 0.3520358 0.3495268 0.3544025 0.2509988 0.2200760
 [37] 0.2582524 0.2423742 0.2016838 0.2184150 0.2753050 0.2102208 0.2702699 0.2320738 0.2107871 0.2624782 0.2573334 0.2655985 0.2534757 0.3011025 0.2478012 0.1961402 0.3423928 0.4838027
 [55] 0.4846070 0.4355293 0.4825637 0.4039390 0.4019427 0.4052275 0.4076734 0.3981909 0.4023658 0.3980844 0.4070284 0.3996086 0.4015868 0.4013852 0.4122699 0.3931068 0.4037824 0.4078245
 [73] 0.4399034 0.4446557 0.4411182 0.4331665 0.4377528 0.4374316 0.4402415 0.4410547 0.4425131 0.4429377 0.4390349 0.4464279 0.4394534 0.4425301 0.4406133 0.4389744 0.4451141 0.4413384
 [91] 0.4458457 0.4376736 0.4463507 0.4399207 0.4429935 0.4419756 0.4414926 0.4456081 0.4418933 0.4367580 0.4424070 0.4413633 0.3936540 0.4014159 0.4040740 0.4101786 0.4057196 0.4036836
[109] 0.4019863 0.3958754 0.3959045 0.4031244 0.4078832 0.4010527 0.4099445 0.4078221 0.3988434 0.4357830 0.3748105 0.4111321 0.3493443 0.3860391 0.3838294 0.3733902 0.4354059 0.3283978
[127] 0.4326722 0.4123895 0.3999874 0.3941227 0.3555004 0.3983077 0.3969559 0.3248728 0.2610450 0.3766881 0.3971263 0.3450029 0.3424705 0.3259292 0.3474856 0.3420153 0.3470506 0.3383234
[145] 0.3531541 0.3433137 0.3555031 0.3303721 0.3451772 0.3478461 0.3560847 0.3324869 0.3358104 0.3410438 0.3516369 0.3390340 0.3526765 0.3514727 0.3293283 0.3525913 0.3279041 0.3430144
[163] 0.3375340 0.3615027 0.3482001 0.3503485 0.3446427 0.3816910 0.3341462 0.3766790 0.3891644 0.3959717 0.4637248 0.3610422 0.4662495 0.4652893 0.4237846 0.3562384 0.3826972 0.3492502
[181] 0.3647404 0.3741866 0.3745212 0.3802004 0.3707591 0.3811427 0.3693188 0.3760095 0.3670509 0.3733676 0.3772977 0.3731008 0.3797560 0.4992390 0.4016557 0.4569518 0.4203946 0.4272854
[199] 0.4193415 0.4317613 0.4218498 0.4205333 0.4209190 0.4184188 0.4195955 0.4210648 0.4214584 0.4260638 0.4142457 0.4264948 0.4264190 0.3903895 0.3809959 0.3750578 0.3697770 0.3766756
[217] 0.3865345 0.3786974 0.3790042 0.3782384 0.3877463 0.3741668 0.3783560 0.3616606 0.3767642 0.3870580 0.4339579 0.3822077 0.2504880 0.2479043 0.2823448 0.3181830 0.2630980 0.2739350
[235] 0.2353579 0.2818055 0.2473089 0.2793681 0.2859952 0.2755730 0.2600344 0.2577974 0.2313074 0.4615884 0.4600988 0.4591063 0.4198652 0.4244795 0.4281141 0.4226035 0.4206386 0.4292147
[253] 0.4267259 0.4337486 0.4216207 0.4262408 0.4303637 0.4274671 0.4218937 0.4263084 0.4178009 0.3916835 0.3986227 0.3930787 0.3977908 0.3927441 0.3921727 0.3919439 0.3908616 0.3915276
[271] 0.3845447 0.3958660 0.3893311 0.3925250 0.3875441 0.3927499 0.3804572 0.5016549 0.5012013 0.5090696 0.5060413 0.4782895 0.3979316 0.3691211 0.4231428 0.3672408 0.2841555 0.4057248
[289] 0.3888018 0.3659097 0.3887534 0.3623376 0.3988731 0.3968721 0.3948259 0.3590153 0.3988787 0.3609444 0.3524646 0.3603447 0.3595331 0.3585730 0.3509037 0.3533867 0.3692998 0.3678701
[307] 0.3713830 0.3673308 0.3699822 0.3672459 0.3604704 0.3470594 0.4936290 0.4846511 0.4937867 0.4517705 0.2310049 0.2087417 0.2236381 0.1961742 0.2095205 0.2133585 0.1852804 0.2182548
[325] 0.1926758 0.2618794 0.2205215 0.2255054 0.2366318 0.1912018 0.2118861 0.4679386 0.4635185 0.4411043 0.4234297 0.4201381 0.4259970 0.4128596 0.4168836 0.4239103 0.4188207 0.4178412
[343] 0.4110824 0.4194226 0.4205830 0.4149692 0.4251447 0.4255670 0.4267103 0.3611232 0.3562412 0.3712363 0.3442412 0.3668304 0.3530934 0.3678192 0.3678768 0.3515870 0.3597254 0.3603614
[361] 0.3413632 0.3419427 0.3676444 0.3581327 0.3448214 0.3504564 0.3566058 0.3487803 0.3563275 0.3526094 0.3450894 0.3524258 0.3419663 0.3558274 0.3509225 0.3524690 0.3497740 0.3659287
[379] 0.3460387 0.4186149 0.4315841 0.4196975 0.4142954 0.4272276 0.4220581 0.4237472 0.4237661 0.4246864 0.4101221 0.4252443 0.4297858 0.4326623 0.4259567 0.4291577 0.2363466 0.1910486
[397] 0.2086093 0.2369651 0.2038637 0.2544728 0.2414272 0.2393905 0.2225038 0.2322584 0.2416522 0.2338668 0.2226538 0.2313073 0.2465563 0.2219365 0.2574623 0.2559964 0.2548630 0.2434608
[415] 0.2871247 0.2602922 0.2597568 0.2582569 0.2696114 0.2577723 0.2594514 0.2572152 0.2509934 0.2176683 0.2275663 0.2251930 0.2266073 0.2271369 0.2169570 0.2411744 0.2112019 0.2195732
[433] 0.2221563 0.2232829 0.1962145 0.2118318 0.2516127 0.2430133 0.2631131 0.2298082 0.2515936 0.2392990 0.2432385 0.2488558 0.2573293 0.2709527 0.2426554 0.2671579 0.2662636 0.2748303
[451] 0.2224785 0.2580088 0.2470324 0.2634908 0.4650387 0.4685421 0.4281665 0.4091938 0.4089690 0.4045210 0.4082926 0.4060756 0.4081337 0.4070808 0.4094892 0.4030051 0.4036429 0.4134862
[469] 0.4034735 0.4154150 0.4083692 0.4148218 0.3539352 0.3598113 0.3540527 0.3573873 0.3649318 0.3451853 0.3355538 0.3580202 0.3532721 0.3573665 0.3641106 0.3563792 0.3523415 0.3563985
[487] 0.3574177 0.3701122 0.3523329 0.3646444 0.3612098 0.3583992 0.3555659 0.3709085 0.3518126 0.3516145 0.3414150 0.3529609 0.3661834 0.3551919 0.3525644 0.3640332 0.4071818 0.3967536
[505] 0.3799720 0.4161375 0.4142353 0.4020766 0.4111853 0.4010004 0.4031748 0.4107778 0.4011318 0.4057403 0.3997106 0.4079328 0.4144587 0.2605003 0.2797827 0.2599739 0.2659934 0.2595775
[523] 0.2811484 0.2789166 0.2714244 0.2377298 0.2646526 0.2668398 0.2620849 0.2688872 0.2634987 0.2582253 0.4082986 0.3943910 0.4309828 0.4016048 0.4046832 0.4689274 0.4829680 0.4506270
[541] 0.3172573 0.1955575 0.2735856 0.3523341 0.3769071 0.4503725 0.4765927 0.2692258 0.2595910 0.2754180 0.4301141 0.4878344 0.4779121 0.4524149 0.4761561 0.4372146 0.3746027 0.4318029
[559] 0.4324379 0.3444640 0.2238289 0.1923227 0.4754487 0.4731791 0.4765927 0.4356104 0.4483053 0.4089247
  [1] 0.2100 0.4400 0.3800 0.4100 0.3800 0.3700 0.3800 0.3700 0.3700 0.3700 0.3800 0.3500 0.3700 0.3700 0.3800 0.3600 0.3800 0.3800 0.3700 0.3400 0.3600 0.3400 0.3500 0.3500 0.3500 0.3300
 [27] 0.3400 0.3500 0.3300 0.3400 0.3600 0.3400 0.3500 0.3600 0.2800 0.2200 0.2700 0.2300 0.1700 0.2100 0.3100 0.1900 0.2400 0.2200 0.1900 0.2500 0.2100 0.2800 0.2400 0.3000 0.2200 0.1300
 [53] 0.3300 0.4900 0.4900 0.4800 0.4800 0.4100 0.4000 0.4000 0.4100 0.4000 0.4100 0.4000 0.4000 0.4000 0.4000 0.4100 0.4200 0.3900 0.4100 0.4100 0.4400 0.4500 0.4400 0.4300 0.4400 0.4500
 [79] 0.4400 0.4400 0.4500 0.4400 0.4400 0.4500 0.4400 0.4400 0.4500 0.4400 0.4500 0.4400 0.4500 0.4400 0.4500 0.4400 0.4400 0.4400 0.4400 0.4500 0.4500 0.4400 0.4500 0.4400 0.4100 0.4000
[105] 0.4000 0.4100 0.4100 0.4000 0.4200 0.3900 0.3900 0.4000 0.4100 0.4000 0.4200 0.4100 0.4000 0.4400 0.3600 0.4200 0.3200 0.3900 0.3900 0.4300 0.4400 0.3700 0.4400 0.4200 0.4200 0.4100
[131] 0.4200 0.4200 0.3000 0.2800 0.2000 0.3500 0.3900 0.3400 0.3400 0.3400 0.3500 0.3600 0.3500 0.3300 0.3600 0.3400 0.3600 0.3200 0.3600 0.3400 0.3700 0.3200 0.3300 0.3400 0.3500 0.3400
[157] 0.3500 0.3600 0.3200 0.3600 0.3200 0.3300 0.3300 0.3700 0.3500 0.3500 0.3200 0.4000 0.3200 0.3500 0.3000 0.4100 0.4600 0.3400 0.4700 0.4700 0.4700 0.3100 0.3900 0.3800 0.3600 0.3800
[183] 0.3700 0.3800 0.3700 0.3900 0.3900 0.3900 0.3700 0.3500 0.3800 0.3800 0.3800 0.5400 0.4500 0.4700 0.4200 0.4300 0.4200 0.4400 0.4300 0.4200 0.4300 0.4200 0.4200 0.4300 0.4300 0.4300
[209] 0.4100 0.4300 0.4300 0.3900 0.4000 0.3800 0.3700 0.3800 0.3900 0.3800 0.3800 0.3900 0.3900 0.3700 0.3800 0.3500 0.3800 0.4000 0.4900 0.4000 0.2400 0.2400 0.3200 0.3200 0.2500 0.2800
[235] 0.2200 0.3000 0.2400 0.2800 0.2900 0.2900 0.2600 0.2400 0.2100 0.4600 0.4600 0.4600 0.4300 0.4300 0.4300 0.4200 0.4300 0.4300 0.4300 0.4400 0.4200 0.4300 0.4300 0.4300 0.4300 0.4300
[261] 0.4300 0.3800 0.4000 0.3800 0.4000 0.3900 0.3900 0.3900 0.3900 0.3900 0.3800 0.4000 0.3900 0.3900 0.3800 0.3900 0.4200 0.5000 0.5100 0.5200 0.5100 0.4900 0.4300 0.3200 0.4400 0.4200
[287] 0.1800 0.4200 0.3700 0.3800 0.3700 0.3600 0.3800 0.4100 0.2900 0.4300 0.4000 0.3600 0.3500 0.3600 0.3600 0.3600 0.3400 0.3900 0.3800 0.3700 0.3800 0.3700 0.3800 0.3700 0.3600 0.3400
[313] 0.5000 0.5000 0.5000 0.4700 0.1900 0.1800 0.2300 0.1700 0.2100 0.2800 0.1600 0.2200 0.1700 0.2800 0.2300 0.1300 0.2600 0.1700 0.2100 0.4800 0.4600 0.4400 0.4300 0.4100 0.4300 0.4100
[339] 0.4100 0.4300 0.4200 0.4100 0.4200 0.4200 0.4200 0.4200 0.4300 0.4300 0.4300 0.3700 0.3800 0.3800 0.3600 0.3700 0.3500 0.3700 0.3700 0.3800 0.3800 0.3600 0.3300 0.3300 0.3800 0.3600
[365] 0.3400 0.3500 0.3600 0.3500 0.3500 0.3500 0.3400 0.3700 0.3300 0.3600 0.3600 0.3600 0.3500 0.3700 0.3500 0.4300 0.4400 0.4000 0.4100 0.4300 0.4100 0.4200 0.4000 0.4300 0.4000 0.4000
[391] 0.4300 0.4400 0.4100 0.4300 0.2200 0.1600 0.1900 0.2400 0.1900 0.2700 0.2500 0.2500 0.2100 0.2700 0.2400 0.2000 0.2200 0.2400 0.2600 0.1900 0.2700 0.2600 0.2400 0.2400 0.3100 0.3100
[417] 0.1600 0.2600 0.2800 0.2600 0.2700 0.2600 0.2600 0.1900 0.2300 0.2200 0.2800 0.2600 0.2000 0.2500 0.2000 0.2000 0.2000 0.2300 0.1700 0.1900 0.2700 0.2500 0.2900 0.2200 0.2500 0.2300
[443] 0.2400 0.2500 0.2600 0.2700 0.2400 0.2800 0.3000 0.2900 0.2000 0.2800 0.2200 0.2800 0.4700 0.4900 0.4400 0.4100 0.4100 0.4000 0.4000 0.4100 0.4100 0.4000 0.4100 0.4000 0.4000 0.4200
[469] 0.4000 0.4200 0.4000 0.4200 0.3500 0.3600 0.3500 0.3600 0.3700 0.3400 0.3200 0.3600 0.3500 0.3600 0.3700 0.3500 0.3500 0.3600 0.3700 0.3800 0.3500 0.3700 0.3600 0.3600 0.3500 0.3800
[495] 0.3500 0.3500 0.3300 0.3500 0.3700 0.3800 0.3500 0.3700 0.4000 0.3900 0.3600 0.4200 0.4200 0.4000 0.4200 0.4000 0.4100 0.3900 0.4000 0.4000 0.4000 0.4100 0.4200 0.3000 0.2900 0.1500
[521] 0.3100 0.2600 0.2900 0.3000 0.2800 0.2200 0.2700 0.2700 0.2100 0.3100 0.2200 0.2900 0.4249 0.4247 0.4438 0.4309 0.4163 0.5036 0.5266 0.4806 0.2993 0.1456 0.2758 0.3623 0.3768 0.4900
[547] 0.4800 0.1926 0.2355 0.1979 0.4837 0.5045 0.4868 0.4546 0.4965 0.4148 0.4023 0.4305 0.3370 0.3539 0.2480 0.1717 0.5105 0.4794 0.4800 0.4548 0.4485 0.4037

# Print the caret tuning summary (CV RMSE/Rsquared/MAE per mtry/splitrule).
model_per
Random Forest 

397 samples
106 predictors

No pre-processing
Resampling: Cross-Validated (10 fold) 
Summary of sample sizes: 357, 358, 357, 355, 357, 359, ... 
Resampling results across tuning parameters:

  mtry  splitrule   RMSE        Rsquared   MAE       
    2   variance    0.03663598  0.8118797  0.02423285
    2   extratrees  0.03786082  0.8096785  0.02568269
   54   variance    0.03310826  0.8322125  0.02154125
   54   extratrees  0.03269921  0.8385388  0.02138579
  106   variance    0.03410869  0.8203200  0.02174119
  106   extratrees  0.03246248  0.8399941  0.02118672

Tuning parameter 'min.node.size' was held constant at a value of 5
RMSE was used to select the optimal model using the smallest value.
The final values used for the model were mtry = 106, splitrule = extratrees and min.node.size = 5.
# install.packages("gridExtra")
library(gridExtra)
library(parallel)

# Scatter one predictor column (y) against the target tmg (x), both taken
# from the global `dataset`. The `importance` argument is accepted so the
# lapply() call in plot_important() can forward it, but it is not used.
plot_column <- function(column, importance){
  xy <- data.frame(x = dataset$tmg, y = dataset[[column]])
  ggplot(xy, aes(x = x, y = y)) +
    geom_point(color = 'blue') +
    xlab("") +
    ylab(column) +
    theme_bw()
}

# Arrange scatter plots of the n_to_plot most important predictors
# (rows of `important`, assumed sorted by importance) in a grid.
plot_important <- function(important, title, n_to_plot=6, columns=2) {
  idx <- seq_len(n_to_plot)
  panels <- lapply(
    important$predictor[idx],
    plot_column,
    importance = important$importance[idx]
  )
  grid.arrange(top = title, grobs = panels, ncol = columns)
}

# plot_list1 <- lapply(top1$predictor[1:6], plot_column)
# grid.arrange(top="Alpha=1", grobs = plot_list1, ncol = 2)

# plot_list0 <- lapply(top0$predictor[1:6], plot_column)
# grid.arrange(top="Alpha=0", grobs = plot_list0, ncol = 2)
# BUG FIX: plot_important() expects a data frame with $predictor and
# $importance columns sorted by importance, but top0 was assigned the raw
# ranger model object, so important$predictor[1:n] was NULL and the grid
# could not be built. Construct the ranked data frame explicitly.
imp_vec <- importance(model_imp)
top0 <- data.frame(
  predictor = names(imp_vec),
  importance = unname(imp_vec)
)
top0 <- top0[order(-top0$importance), ]
# NOTE(review): model_imp carries *impurity* importance, yet the filename
# and title say "permutation" — confirm which ranking is intended.
png("rf_permutation_importance.png", width = 1000, height = 1200)  # fixed "improtance" typo
plot_important(top0, n_to_plot = 20, columns = 4, "RF Permutation Importance")
dev.off()
null device 
          1 

# Install and load necessary libraries
# install.packages("ggplot2")
library(ggplot2)

# Cross-validation tuning results, transcribed from the caret summary above.
cv_results <- read.table(text = "  mtry  splitrule   RMSE        Rsquared   MAE       
    2   variance    0.03663598  0.8118797  0.02423285
    2   extratrees  0.03786082  0.8096785  0.02568269
   54   variance    0.03310826  0.8322125  0.02154125
   54   extratrees  0.03269921  0.8385388  0.02138579
  106   variance    0.03410869  0.8203200  0.02174119
  106   extratrees  0.03246248  0.8399941  0.02118672", header = TRUE)

# Grouped bar chart of RMSE by mtry, coloured by split rule.
# geom_col() is the idiomatic shorthand for geom_bar(stat = "identity").
ggplot(cv_results, aes(x = factor(mtry), y = RMSE, fill = splitrule)) +
  geom_col(position = "dodge") +
  labs(title = "Random Forest RMSE by mtry and splitrule",
       x = "mtry",
       y = "RMSE") +
  theme_minimal()

---
title: "Regression Model using Ranger for FeNi"
output: html_notebook
---

```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = TRUE)
```

## 1. Reading the dataset
```{r}
# Load the raw dataset from CSV and preview the first rows.
library(readr)
dataset <- read_csv("new_dataset.csv")
head(dataset)

```
## 2. Splitting the dataset
We will split the dataset into a training set (70%) and a testing set (30%).
```{r}
set.seed(123) # for reproducibility
# 70/30 train/test split by row index. floor() makes the truncation that
# sample() applies to a non-integer size explicit; seq_len() is the safe
# idiom for index sequences (1:nrow() misbehaves for zero rows).
n_rows <- nrow(dataset)
sample_index <- sample(seq_len(n_rows), floor(0.7 * n_rows))
train_data <- dataset[sample_index, ]
test_data <- dataset[-sample_index, ]

```

## 3. Creating a regression model
Using the ranger package, we'll predict the `tmg` feature using permutation importance.

```{r}
# Fit a ranger random forest regressing tmg on all other columns,
# computing permutation-based variable importance during training.
library(ranger)
model_per <- ranger(tmg ~ ., data = train_data, importance = 'permutation')
model_per
```


## 4. Calculate importance with permutation

```{r}
library(dplyr)
library(ggplot2)

# Top-20 permutation importances as a horizontal bar chart.
# Build the data frame explicitly from the named importance vector instead
# of the deprecated add_rownames() plus a backtick-quoted derived column.
imp_per <- importance(model_per)
plot_perm <- data.frame(
  predictor = names(imp_per),
  importance = unname(imp_per)
) |>
  arrange(desc(importance)) |>
  head(20) |>
  # reverse the factor levels so the largest bar is drawn at the top
  mutate(predictor = factor(predictor, levels = rev(unique(predictor)))) |>
  ggplot() +
  geom_col(aes(y = predictor, x = importance), fill = 'darkblue', color = 'gray') +
  ggtitle("Top 20 predictor importance using permutation") +
  theme_minimal()
plot_perm
```


## 5. Calculate importance with impurity
```{r}
# Fit a second forest that scores importance by impurity (node variance
# decrease) so it can be compared against the permutation ranking.
library(ranger)
model_imp <- ranger(tmg ~ ., data = train_data, importance = 'impurity')
model_imp
```

```{r}
# Top-20 impurity importances; built explicitly from the named importance
# vector instead of the deprecated add_rownames() idiom.
imp_imp <- importance(model_imp)
plot_imp <- data.frame(
  predictor = names(imp_imp),
  importance = unname(imp_imp)
) |>
  arrange(desc(importance)) |>
  head(20) |>
  mutate(predictor = factor(predictor, levels = rev(unique(predictor)))) |>
  ggplot() +
  geom_col(aes(y = predictor, x = importance), fill = 'darkblue', color = 'gray') +
  ggtitle("Top 20 predictor importance using impurity") +
  theme_minimal()
```

## Permutation vs. Impurity
```{r}
library(gridExtra)
gridExtra::grid.arrange(plot_imp,plot_perm, ncol=2)
plot_imp
plot_perm
```
## 5. Evaluate results on test dataset
```{r}
# Hold-out RMSE on the 30% test split.
predictions <- predict(model_per, data = test_data)$predictions
# Compute the RMSE (Root Mean Square Error)
RMSE <- sqrt(mean((predictions - test_data$tmg)^2))
RMSE
# NOTE(review): the full-dataset RMSE below includes the training rows, so
# it is optimistically biased relative to the test-set figure above.
predictions <- predict(model_per, data = dataset)$predictions
# Compute the RMSE (Root Mean Square Error)
RMSE <- sqrt(mean((predictions - dataset$tmg)^2))
RMSE
```
## 6. Plot: Predicted vs Reference values
```{r}
library(ggplot2)

# Scatter of model predictions against reference tmg values with the y = x
# identity line (points on the red line are perfect predictions).
# Replaces three copy-pasted blocks that differed only in the data argument.
plot_pred_vs_ref <- function(data, title = "Predicted vs Reference values") {
  predictions <- predict(model_per, data = data)$predictions
  results <- data.frame(Reference = data$tmg, Predicted = predictions)
  ggplot(results, aes(x = Reference, y = Predicted)) +
    geom_point(color = 'blue') +
    geom_abline(intercept = 0, slope = 1, color = 'red') +
    ggtitle(title) +
    xlab("Reference Values") +
    ylab("Predicted Values") +
    theme_bw()
}

# Test split, training split, and full dataset (top-level calls autoprint).
plot_pred_vs_ref(test_data)
plot_pred_vs_ref(train_data)
plot_pred_vs_ref(dataset)

```


```{r}
# Install (once, interactively) and load everything this analysis needs.
# install.packages(c("ranger", "caret"))
# Each package is attached exactly once; the original repeated
# library(caret) three times and library(ranger) twice.
library(readr)   # read_csv()
library(dplyr)   # data-manipulation verbs
library(ranger)  # random forests
library(caret)   # train() / trainControl() cross-validation wrappers
```

```{r}


library(readr)
library(caret)
library(ranger)
library(dplyr)

# Reload the raw data and drop uninformative columns before modelling.
dataset <- read_csv("new_dataset.csv")
nzv <- nearZeroVar(dataset, saveMetrics = FALSE)
# Guard: `dataset[, -integer(0)]` selects ZERO columns, so only subset
# when near-zero-variance columns were actually found.
if (length(nzv) > 0) {
  dataset <- dataset[, -nzv]
}
dataset$name <- NULL  # identifier column, not a predictor

# 70/30 train/test split (same seed as earlier for reproducibility).
set.seed(123)
sample_index <- sample(seq_len(nrow(dataset)), 0.7 * nrow(dataset))
train_data <- dataset[sample_index, ]
test_data <- dataset[-sample_index, ]

# Random forest via caret with 10-fold cross-validation; the permutation
# importance setting is passed through to ranger.
model_per <- train(
  tmg ~ .,
  data = train_data,
  method = "ranger",
  trControl = trainControl(method = "cv", number = 10),
  importance = "permutation"
)

# Predicted-vs-reference scatter plot with the RMSE in the title.
# `data` must contain the response column `tmg`; `model` defaults to the
# cross-validated fit above. The original body contained leftover debug
# print()s that dumped the entire data frame into the report.
do_plot <- function(data, type, model = model_per) {
  predictions <- predict(model, data)
  rmse <- sqrt(mean((predictions - data$tmg)^2))
  results <- data.frame(Reference = data$tmg, Predicted = as.vector(predictions))
  ggplot(results, aes(x = Reference, y = Predicted)) +
    geom_point(color = 'blue') +
    geom_abline(intercept = 0, slope = 1, color = 'red') +
    ggtitle(paste(type, "Random Forest; RMSE:", signif(rmse, 4))) +
    xlab("Reference Values") +
    ylab("Predicted Values") +
    theme_bw()
}

print(do_plot(test_data, "Test"))
print(do_plot(train_data, "Train"))
print(do_plot(dataset, "Total"))
model_per
```
```{r}
# Extract cross-validated permutation importance from the caret model.
perm_importance <- varImp(model_per)
print(perm_importance)

# caret's built-in importance plot.
plot(perm_importance)

# Tidy the scores into a (predictor, importance) data frame, sorted
# descending. Built in one pipeline; the original also printed the
# intermediate data frame AND the entire raw dataset (debug leftovers
# that cluttered the rendered report).
model_imp <- data.frame(perm_importance$importance) |>
  mutate(predictor = rownames(perm_importance$importance),
         importance = Overall) |>
  select(predictor, importance) |>
  arrange(desc(importance))

plot_imp <- model_imp |>
  head(20) |>
  # Reverse levels so the most important predictor is drawn on top.
  mutate(predictor = factor(predictor, levels = rev(unique(predictor)))) |>
  ggplot() +
  geom_col(aes(y = predictor, x = importance), fill = 'darkblue', color = 'gray') +
  ggtitle("Top 20 predictor importance using permutation") +
  theme_minimal()
plot_imp
```

```{r}
# install.packages("gridExtra")
library(gridExtra)
library(parallel)

# Scatter one predictor column against the response `tmg`.
# `importance` is accepted because callers pass scores alongside the
# column names, but it is not used in the plot itself (kept for
# interface compatibility; could annotate the axis label later).
plot_column <- function(column, importance) {
  results <- data.frame(x = dataset$tmg, y = dataset[[column]])
  ggplot(results, aes(x = x, y = y)) +
    geom_point(color = 'blue') +
    xlab("") +
    ylab(column) +
    theme_bw()
}

# Arrange scatter plots for the `n_to_plot` most important predictors
# in a grid with `columns` columns (the original crammed this into one
# unreadable line).
plot_important <- function(important, title, n_to_plot = 6, columns = 2) {
  idx <- seq_len(n_to_plot)
  plots <- lapply(important$predictor[idx], plot_column,
                  importance = important$importance[idx])
  grid.arrange(top = title, grobs = plots, ncol = columns)
}

top0 <- model_imp
# Fixed the "improtance" typo in the output file name.
png("rf_permutation_importance.png", width = 1000, height = 1200)
plot_important(top0, title = "RF Permutation Importance",
               n_to_plot = 20, columns = 4)
dev.off()
```


```{r}

# Load the plotting library (install once, interactively, if missing).
# install.packages("ggplot2")
library(ggplot2)

# Cross-validation results transcribed from the caret tuning output.
tuning_results <- read.table(text = "  mtry  splitrule   RMSE        Rsquared   MAE       
    2   variance    0.03663598  0.8118797  0.02423285
    2   extratrees  0.03786082  0.8096785  0.02568269
   54   variance    0.03310826  0.8322125  0.02154125
   54   extratrees  0.03269921  0.8385388  0.02138579
  106   variance    0.03410869  0.8203200  0.02174119
  106   extratrees  0.03246248  0.8399941  0.02118672", header = TRUE)

# Grouped bars: RMSE for each mtry value, one bar per splitrule.
ggplot(tuning_results, aes(x = factor(mtry), y = RMSE, fill = splitrule)) +
  geom_col(position = "dodge") +
  labs(title = "Random Forest RMSE by mtry and splitrule",
       x = "mtry",
       y = "RMSE") +
  theme_minimal()
```
